Long short-term memory (LSTM) is a recurrent neural network architecture used in deep learning. Unlike standard feedforward networks, an LSTM has feedback connections, so it can process not only single data points but entire sequences of data.
What makes LSTM special is a memory cell that can retain information over long stretches of a sequence. This is exactly what you need when dealing with data that changes over time, like weather patterns, stock prices, or language.
Here’s how it works, gate by gate:
Forget Gate: Decides what information to discard from the cell’s memory.
Input Gate: Decides what new information we’re going to store in memory.
Output Gate: Decides what part of the memory to expose as the cell’s output.
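To make the gate mechanics concrete, here is a single LSTM time step written out in NumPy. This is a minimal illustrative sketch, not the Keras implementation used later; the stacked [forget, input, candidate, output] parameter layout of W, U, b is an assumption of this example.
import numpy as np

def sigmoid(z):
    return 1.0 / (1.0 + np.exp(-z))

def lstm_step(x_t, h_prev, c_prev, W, U, b):
    """One LSTM step. Shapes: W (4h, d), U (4h, h), b (4h,)."""
    z = W @ x_t + U @ h_prev + b                  # pre-activations for all four gates
    f, i, g, o = np.split(z, 4)                   # forget, input, candidate, output
    f, i, o = sigmoid(f), sigmoid(i), sigmoid(o)  # gate values squashed into (0, 1)
    g = np.tanh(g)                                # candidate memory content
    c_t = f * c_prev + i * g                      # forget old memory, write new
    h_t = o * np.tanh(c_t)                        # expose a filtered view of memory
    return h_t, c_t

# Tiny demo: 1 input feature, 4 hidden units, random weights.
rng = np.random.default_rng(0)
d, h = 1, 4
h_t, c_t = lstm_step(rng.normal(size=d), np.zeros(h), np.zeros(h),
                     rng.normal(size=(4*h, d)), rng.normal(size=(4*h, h)), np.zeros(4*h))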
import pandas as pd
import numpy as np
import math
import datetime as dt
import warnings
warnings.simplefilter(action="ignore", category=FutureWarning)
df=pd.read_csv(r"C:\Users\Armaan\Downloads\IDEA.NS.csv")
df.head()
| | Date | Open | High | Low | Close | Adj Close | Volume |
|---|---|---|---|---|---|---|---|
| 0 | 28-10-2010 | 40.700409 | 41.273228 | 38.710609 | 39.434174 | 38.824368 | 12280665.0 |
| 1 | 29-10-2010 | 40.067291 | 40.790855 | 39.524620 | 40.670261 | 40.041344 | 5692358.0 |
| 2 | 01-11-2010 | 40.851151 | 42.087238 | 40.851151 | 41.393822 | 40.753716 | 6709782.0 |
| 3 | 02-11-2010 | 41.574715 | 41.574715 | 40.368774 | 40.519520 | 39.892933 | 3966238.0 |
| 4 | 03-11-2010 | 41.876198 | 41.876198 | 39.886398 | 40.097439 | 39.477379 | 4179627.0 |
df.tail()
| | Date | Open | High | Low | Close | Adj Close | Volume |
|---|---|---|---|---|---|---|---|
| 3207 | 20-10-2023 | 12.00 | 12.10 | 11.65 | 11.75 | 11.75 | 150616671.0 |
| 3208 | 23-10-2023 | 11.80 | 11.90 | 10.85 | 10.95 | 10.95 | 229385426.0 |
| 3209 | 25-10-2023 | 11.00 | 11.20 | 10.60 | 10.85 | 10.85 | 259001685.0 |
| 3210 | 26-10-2023 | 10.85 | 10.95 | 10.50 | 10.75 | 10.75 | 163742432.0 |
| 3211 | 27-10-2023 | 10.65 | 11.05 | 10.65 | 10.90 | 10.90 | 207190392.0 |
df.describe()
| | Open | High | Low | Close | Adj Close | Volume |
|---|---|---|---|---|---|---|
| count | 3206.000000 | 3206.000000 | 3206.000000 | 3206.000000 | 3206.000000 | 3.206000e+03 |
| mean | 44.186779 | 45.022857 | 43.304874 | 44.117548 | 43.703432 | 9.045426e+07 |
| std | 32.502808 | 33.016211 | 31.960267 | 32.466469 | 32.042595 | 1.595767e+08 |
| min | 2.700000 | 3.150000 | 2.400000 | 2.950000 | 2.950000 | 0.000000e+00 |
| 25% | 10.100000 | 10.362500 | 9.812500 | 10.050000 | 10.050000 | 6.884168e+06 |
| 50% | 45.267899 | 45.991461 | 44.604631 | 45.222675 | 44.870220 | 1.801413e+07 |
| 75% | 65.723625 | 67.080299 | 64.412164 | 65.806534 | 65.181887 | 1.039615e+08 |
| max | 122.402710 | 123.005676 | 119.568756 | 122.251968 | 120.885239 | 1.073742e+09 |
print(df.columns)
Index(['Date', 'Open', 'High', 'Low', 'Close', 'Adj Close', 'Volume'], dtype='object')
df.isnull().sum()
Date         0
Open         6
High         6
Low          6
Close        6
Adj Close    6
Volume       6
dtype: int64
df=df.dropna()
df.isnull().sum()
Date         0
Open         0
High         0
Low          0
Close        0
Adj Close    0
Volume       0
dtype: int64
df.shape
(3206, 7)
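The Date column comes in as dd-mm-yyyy strings. The plots below use it as-is, but pandas can parse it into real datetimes if a proper time axis is wanted; a small optional sketch, not part of the original run:
# Optional: parse the dd-mm-yyyy strings into datetime64 values.
dates = pd.to_datetime(df['Date'], format='%d-%m-%Y')
print(dates.min(), dates.max())   # spans 2010-10-28 to 2023-10-27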
sd=df['Date'].iloc[0]
ed=df['Date'].iloc[-1]
print('Starting Date',sd)
print('Ending Date',ed)
Starting Date 28-10-2010
Ending Date 27-10-2023
import matplotlib.pyplot as plt
import seaborn as sns
%matplotlib inline
fig, axs = plt.subplots(2, 2, figsize=(18, 11))
# Create KDE plots with labels
sns.kdeplot(df.Open, fill=True, color='Green', alpha=0.5, ax=axs[0, 0], label='Open')
sns.kdeplot(df.High, fill=True, color='orange', alpha=0.5, ax=axs[0, 1], label='High')
sns.kdeplot(df.Low, fill=True, color='violet', alpha=0.5, ax=axs[1, 0], label='Low')
sns.kdeplot(df.Close, fill=True, color='blue', alpha=0.5, ax=axs[1, 1], label='Close')
# Add legends
axs[0, 0].legend()
axs[0, 1].legend()
axs[1, 0].legend()
axs[1, 1].legend()
plt.show()
import plotly.express as px
from itertools import cycle
names= cycle(['Stock Open Price','Stock Close Price','Stock High Price','Stock Low Price'])
fig = px.line(df, x='Date', y=["Open",'Close',"High","Low"], labels={'Date': 'Date','value':'Stock value'},
title='IDEA.NS stock prices chart')
fig.update_layout( font_color='black',legend_title_text='Stock Parameters')
fig.for_each_trace(lambda t: t.update(name = next(names)))
fig.show()
fig = px.line(df, x='Date', y='Volume', labels={'Date': 'Date', 'Volume': 'Stock Volume'},
title='Stock Volume Over Time')
fig.update_layout(font_size=13,font_color='black')
fig.show()
ma50=df.Close.rolling(50).mean()
ma100=df.Close.rolling(100).mean()
ma100
0 NaN
1 NaN
2 NaN
3 NaN
4 NaN
...
3207 8.9595
3208 8.9980
3209 9.0345
3210 9.0705
3211 9.1080
Name: Close, Length: 3206, dtype: float64
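As a quick illustration of what rolling(n).mean() computes, and why the first n-1 entries above are NaN, here is a toy example:
# rolling(n).mean() averages the previous n values; entries without a
# full window of history come out as NaN.
s = pd.Series([1, 2, 3, 4, 5])
print(s.rolling(3).mean().tolist())   # [nan, nan, 2.0, 3.0, 4.0]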
plt.figure(figsize=(22,8))
plt.plot(df.Open, label='Open')
plt.plot(ma50, "yellow", label='MA50')
plt.plot(ma100, "r", label='MA100')
plt.title('Stock Open Over Time')
plt.grid(True)
plt.legend(loc='best', prop={'size': 16})
plt.show()
plt.figure(figsize=(22,8))
plt.plot(df.Close, label='Close')
plt.plot(ma50, "yellow", label='MA50')
plt.plot(ma100, "r", label='MA100')
plt.title('Stock Close Over Time')
plt.grid(True)
plt.legend(loc='best', prop={'size': 16})
plt.show()
plt.figure(figsize=(22,8))
plt.plot(df.High, label='High')
plt.plot(ma50, "yellow", label='MA50')
plt.plot(ma100, "r", label='MA100')
plt.title('Stock High Over Time')
plt.grid(True)
plt.legend(loc='best', prop={'size': 16})
plt.show()
plt.figure(figsize=(22,8))
plt.plot(df.Low, label='Low')
plt.plot(ma50, "yellow", label='MA50')
plt.plot(ma100, "r", label='MA100')
plt.title('Stock Low Over Time')
plt.grid(True)
plt.legend(loc='best', prop={'size': 16})
plt.show()
df1=df.Open
from sklearn.preprocessing import MinMaxScaler
Scaler=MinMaxScaler(feature_range=(0,1))
df1=Scaler.fit_transform(np.array(df1).reshape(-1,1))
df.shape
(3206, 7)
df1
array([[0.31745655],
[0.31216746],
[0.31871585],
...,
[0.06933845],
[0.06808534],
[0.06641454]])
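MinMaxScaler with feature_range=(0,1) maps each value to (x - min) / (max - min), which is why df1 lies in [0, 1]. A quick manual check of that formula against the transform above:
x = np.array(df.Open).reshape(-1, 1)
manual = (x - x.min()) / (x.max() - x.min())   # the MinMaxScaler formula
assert np.allclose(manual, df1)                # matches Scaler.fit_transform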
training_size=int(len(df1)*0.70)
test_size=len(df1)-training_size
train_data,test_data=df1[0:training_size,:],df1[training_size:len(df1),:1]
print("train_data: ", train_data.shape)
print("test_data: ", test_data.shape)
train_data:  (2244, 1)
test_data:  (962, 1)
def create_dataset(dataset, time_step=1):
dataX = [dataset[i:(i+time_step), 0] for i in range(len(dataset)-time_step-1)]
dataY = [dataset[i + time_step, 0] for i in range(len(dataset)-time_step-1)]
return np.array(dataX), np.array(dataY)
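A tiny worked example makes the windowing concrete (note that the function's -1 drops one otherwise usable sample; that quirk is kept as-is here):
demo = np.arange(6).reshape(-1, 1)          # [[0], [1], [2], [3], [4], [5]]
X_demo, y_demo = create_dataset(demo, time_step=2)
print(X_demo.tolist())                      # [[0, 1], [1, 2], [2, 3]]
print(y_demo.tolist())                      # [2, 3, 4]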
# build windows: X = [t, t+1, ..., t+time_step-1] and Y = t+time_step
time_step = 100
X_train, y_train = create_dataset(train_data, time_step)
X_test, y_test = create_dataset(test_data, time_step)
print("X_train: ", X_train.shape)
print("y_train: ", y_train.shape)
print("X_test: ", X_test.shape)
print("y_test", y_test.shape)
X_train:  (2143, 100)
y_train:  (2143,)
X_test:  (861, 100)
y_test (861,)
# reshape input to be [samples, time steps, features] which is required for LSTM
X_train =X_train.reshape(X_train.shape[0],X_train.shape[1] , 1)
X_test = X_test.reshape(X_test.shape[0],X_test.shape[1] , 1)
print("X_train: ", X_train.shape)
print("X_test: ", X_test.shape)
X_train:  (2143, 100, 1)
X_test:  (861, 100, 1)
import tensorflow as tf
from tensorflow.keras.models import Sequential
from tensorflow.keras.layers import Dense
from tensorflow.keras.layers import LSTM
tf.__version__
'2.14.0'
tf.keras.backend.clear_session()
model=Sequential()
model.add(LSTM(50,return_sequences=True,input_shape=(time_step,1)))
model.add(LSTM(50,return_sequences=True))
model.add(LSTM(50))
model.add(Dense(1))
model.compile(loss='mean_squared_error',optimizer='adam')
tf.keras.backend.clear_session(): Clears any old models or layers that might still be in memory.
model=Sequential(): Starts building a new model, layer by layer.
model.add(LSTM(50,return_sequences=True,input_shape=(time_step,1))): Adds an LSTM layer with 50 units that reads time_step data points at a time; return_sequences=True makes it pass its full output sequence on to the next layer.
model.add(LSTM(50,return_sequences=True)): Adds a second LSTM layer that takes the previous layer's output sequence as its input.
model.add(LSTM(50)): Adds a third LSTM layer that returns only its final output, not the whole sequence.
model.add(Dense(1)): Adds a fully connected layer that maps the 50 LSTM outputs to the single predicted value.
model.compile(loss='mean_squared_error',optimizer='adam'): Sets the model up for training, using mean squared error to measure how far off the predictions are and the Adam optimizer to update the weights.
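One optional refinement, not used in the run below: since training goes for a fixed 100 epochs, Keras early stopping could halt it once val_loss stops improving and keep the best weights. A sketch:
from tensorflow.keras.callbacks import EarlyStopping

# Stop after 10 epochs without val_loss improvement; restore the best weights.
early_stop = EarlyStopping(monitor='val_loss', patience=10,
                           restore_best_weights=True)
# model.fit(X_train, y_train, validation_data=(X_test, y_test),
#           epochs=100, batch_size=64, callbacks=[early_stop], verbose=1)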
model.summary()
Model: "sequential"
_________________________________________________________________
Layer (type)                 Output Shape              Param #
=================================================================
lstm (LSTM)                  (None, 100, 50)           10400
lstm_1 (LSTM)                (None, 100, 50)           20200
lstm_2 (LSTM)                (None, 50)                20200
dense (Dense)                (None, 1)                 51
=================================================================
Total params: 50851 (198.64 KB)
Trainable params: 50851 (198.64 KB)
Non-trainable params: 0 (0.00 Byte)
_________________________________________________________________
model.fit(X_train,y_train,validation_data=(X_test,y_test),epochs=100,batch_size=64,verbose=1)
Epoch 1/100
34/34 [==============================] - 20s 359ms/step - loss: 0.0407 - val_loss: 0.0038
Epoch 2/100
34/34 [==============================] - 10s 304ms/step - loss: 0.0024 - val_loss: 2.7771e-04
Epoch 3/100
34/34 [==============================] - 11s 310ms/step - loss: 0.0015 - val_loss: 9.8978e-05
...
Epoch 98/100
34/34 [==============================] - 11s 333ms/step - loss: 2.3615e-04 - val_loss: 2.2703e-05
Epoch 99/100
34/34 [==============================] - 11s 329ms/step - loss: 2.4124e-04 - val_loss: 2.3569e-05
Epoch 100/100
34/34 [==============================] - 11s 335ms/step - loss: 2.3454e-04 - val_loss: 2.6811e-05
<keras.src.callbacks.History at 0x24912aabd00>
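model.fit returns the History object shown above; if it is captured in a variable, the loss curves can be plotted. A sketch (note that calling fit again on the same model continues training from its current weights, so ideally the return value is captured on the first call):
history = model.fit(X_train, y_train, validation_data=(X_test, y_test),
                    epochs=100, batch_size=64, verbose=1)  # same call as above
plt.plot(history.history['loss'], label='train loss')
plt.plot(history.history['val_loss'], label='val loss')
plt.xlabel('Epoch'); plt.ylabel('MSE loss'); plt.legend(); plt.show()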
### Let's do the prediction and check performance metrics
train_predict=model.predict(X_train)
test_predict=model.predict(X_test)
67/67 [==============================] - 5s 54ms/step
27/27 [==============================] - 1s 49ms/step
train_predict.shape, test_predict.shape
((2143, 1), (861, 1))
# Transform back to original form
train_predict = Scaler.inverse_transform(train_predict)
test_predict = Scaler.inverse_transform(test_predict)
original_ytrain = Scaler.inverse_transform(y_train.reshape(-1,1))
original_ytest = Scaler.inverse_transform(y_test.reshape(-1,1))
from sklearn.metrics import mean_squared_error, mean_absolute_error, explained_variance_score, r2_score
from sklearn.metrics import mean_poisson_deviance, mean_gamma_deviance, accuracy_score
# train_predict/test_predict were inverse-transformed above, so they must be
# compared against the inverse-transformed targets (mixing scales gives meaningless numbers)
math.sqrt(mean_squared_error(original_ytrain,train_predict))
1.7947650549226102
math.sqrt(mean_squared_error(original_ytest,test_predict))
0.6198080165432677
# Evaluation metrices RMSE and MAE
print("Train data RMSE: ", math.sqrt(mean_squared_error(original_ytrain,train_predict)))
print("Train data MSE: ", mean_squared_error(original_ytrain,train_predict))
print("Traint data MAE: ", mean_absolute_error(original_ytrain,train_predict))
print("-------------------------------------------------------------------------------------")
print("Test data RMSE: ", math.sqrt(mean_squared_error(original_ytest,test_predict)))
print("Test data MSE: ", mean_squared_error(original_ytest,test_predict))
print("Test data MAE: ", mean_absolute_error(original_ytest,test_predict))
Train data RMSE:  1.7947650549226102
Train data MSE:  3.22118160237136
Train data MAE:  1.2684575013328192
-------------------------------------------------------------------------------------
Test data RMSE:  0.6198080165432677
Test data MSE:  0.38416197737129953
Test data MAE:  0.4741334131524665
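The metrics import above also pulled in r2_score and explained_variance_score, which the run never uses; applying them is one line each (a sketch, outputs not shown):
print("Train R^2: ", r2_score(original_ytrain, train_predict))
print("Test R^2: ", r2_score(original_ytest, test_predict))
print("Test explained variance: ", explained_variance_score(original_ytest, test_predict))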
# shift train predictions for plotting
look_back=time_step
trainPredictPlot = np.empty_like(df1)
trainPredictPlot[:, :] = np.nan
trainPredictPlot[look_back:len(train_predict)+look_back, :] = train_predict
print("Train predicted data: ", trainPredictPlot.shape)
# shift test predictions for plotting
testPredictPlot = np.empty_like(df1)
testPredictPlot[:, :] = np.nan
testPredictPlot[len(train_predict)+(look_back*2)+1:len(df1)-1, :] = test_predict
print("Test predicted data: ", testPredictPlot.shape)
names = cycle(['Original Open price','Train predicted Open price','Test predicted Open price'])
plotdf = pd.DataFrame({'Date': df['Date'],
'original_Open': df['Open'],
'train_predicted_Open': trainPredictPlot.reshape(1,-1)[0].tolist(),
'test_predicted_Open': testPredictPlot.reshape(1,-1)[0].tolist()})
fig = px.line(plotdf,x=plotdf['Date'], y=[plotdf['original_Open'],plotdf['train_predicted_Open'],
plotdf['test_predicted_Open']],
labels={'value':'Stock price','Date': 'Date'})
fig.update_layout(title_text='Comparison between original Open price vs predicted Open price',
plot_bgcolor='white', font_size=15, font_color='black', legend_title_text='Open Price')
fig.for_each_trace(lambda t: t.update(name = next(names)))
fig.update_xaxes(showgrid=False)
fig.update_yaxes(showgrid=False)
fig.show()
Train predicted data:  (3206, 1)
Test predicted data:  (3206, 1)
len(test_data)
962
x_input=test_data[862:].reshape(1,-1)   # last 100 scaled values (962 - 862 = 100)
x_input.shape
(1, 100)
temp_input=list(x_input)
temp_input=temp_input[0].tolist()
temp_input
[0.037593133856368005, 0.03717543236907503, 0.03634002939448906, 0.04260555170388372, 0.03926393980553991, ..., 0.0693384468906343, 0.06808534242875537, 0.06641453647958347]
# demonstrating prediction for the next 30 days
from numpy import array
lst_output=[]
n_step=100
i=0
while(i<30):
    if(len(temp_input)>100):
        x_input=np.array(temp_input[1:])
        print("{} day input {}".format(i,x_input))
        x_input=x_input.reshape(1,-1)
        x_input=x_input.reshape((1,n_step,1))
        yhat=model.predict(x_input,verbose=0)
        print("{} day output {}".format(i,yhat))
        temp_input.extend(yhat[0].tolist())
        temp_input=temp_input[1:]
        lst_output.extend(yhat.tolist())
        i=i+1
    else:
        x_input=x_input.reshape((1,n_step,1))
        yhat=model.predict(x_input,verbose=0)
        print(yhat[0])
        temp_input.extend(yhat[0].tolist())
        print(len(temp_input))
        lst_output.extend(yhat.tolist())
        i=i+1
print(lst_output)
[0.07203833]
101
1 day input [0.03717543 0.03634003 0.04260555 ... 0.06808534 0.06641454 0.07203833]
1 day output [[0.07127579]]
2 day input [0.03634003 0.04260555 0.03926394 ... 0.06641454 0.07203833 0.07127579]
2 day output [[0.07144839]]
...
29 day input [0.03926394 0.04093475 0.04177015 ... 0.08764835 0.0882688 0.08888458]
29 day output [[0.08949577]]
[[0.07203833013772964], [0.07127578556537628], [0.07144839316606522], ..., [0.08888457715511322], [0.08949577063322067]]
day_new=np.arange(1,101)
day_pred=np.arange(101,131)
len(df1)
3206
df3=df1.tolist()
df3.extend(lst_output)
plt.plot(day_new,Scaler.inverse_transform(df1[3106:]))
plt.plot(day_pred,Scaler.inverse_transform(lst_output))
[<matplotlib.lines.Line2D at 0x2491c26dd60>]
df3=df1.tolist()
df3.extend(lst_output)
plt.plot(df3[3100:])
[<matplotlib.lines.Line2D at 0x2491c34cc40>]
x_input=test_data[len(test_data)-time_step:].reshape(1,-1)
temp_input=list(x_input)
temp_input=temp_input[0].tolist()
from numpy import array
lst_output=[]
n_steps=time_step
i=0
pred_days = 30
while(i<pred_days):
    if(len(temp_input)>time_step):
        x_input=np.array(temp_input[1:])
        #print("{} day input {}".format(i,x_input))
        x_input = x_input.reshape(1,-1)
        x_input = x_input.reshape((1, n_steps, 1))
        yhat = model.predict(x_input, verbose=0)
        #print("{} day output {}".format(i,yhat))
        temp_input.extend(yhat[0].tolist())
        temp_input=temp_input[1:]
        #print(temp_input)
        lst_output.extend(yhat.tolist())
        i=i+1
    else:
        x_input = x_input.reshape((1, n_steps,1))
        yhat = model.predict(x_input, verbose=0)
        temp_input.extend(yhat[0].tolist())
        lst_output.extend(yhat.tolist())
        i=i+1
print("Output of predicted next days: ", len(lst_output))
Output of predicted next days: 30
last_days=np.arange(1,time_step+1)
day_pred=np.arange(time_step+1,time_step+pred_days+1)
print(last_days)
print(day_pred)
[  1   2   3   4   5   6   7   8   9  10  11  12  13  14  15  16  17  18  19  20  21  22  23  24  25  26  27  28  29  30  31  32  33  34  35  36  37  38  39  40  41  42  43  44  45  46  47  48  49  50  51  52  53  54  55  56  57  58  59  60  61  62  63  64  65  66  67  68  69  70  71  72  73  74  75  76  77  78  79  80  81  82  83  84  85  86  87  88  89  90  91  92  93  94  95  96  97  98  99 100]
[101 102 103 104 105 106 107 108 109 110 111 112 113 114 115 116 117 118 119 120 121 122 123 124 125 126 127 128 129 130]
temp_mat = np.empty((len(last_days)+pred_days+1,1))
temp_mat[:] = np.nan
temp_mat = temp_mat.reshape(1,-1).tolist()[0]
# .copy() prevents the two names from aliasing the same list, and the slice
# widths below match the number of values written, so both lists stay 131 long
last_original_days_value = temp_mat.copy()
next_predicted_days_value = temp_mat.copy()
last_original_days_value[0:time_step] = Scaler.inverse_transform(df1[len(df1)-time_step:]).reshape(1,-1).tolist()[0]
next_predicted_days_value[time_step+1:] = Scaler.inverse_transform(np.array(lst_output).reshape(-1,1)).reshape(1,-1).tolist()[0]
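A quick illustration of why the .copy() calls above matter; with plain assignment both names would point at one list, and filling one series would clobber the other:
a = [np.nan] * 3
b = a            # b is the SAME list object, not a copy
b[0] = 1.0
print(a)         # [1.0, nan, nan] -- mutating b changed a too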
new_pred_plot = pd.DataFrame({
'last_original_days_value':last_original_days_value,
'next_predicted_days_value':next_predicted_days_value
})
names = cycle(['Last 100 days Open price','Predicted next 30 days Open price'])
fig = px.line(new_pred_plot,x=new_pred_plot.index, y=[new_pred_plot['last_original_days_value'],
new_pred_plot['next_predicted_days_value']],
labels={'value': 'Stock price','index': 'Timestamp'})
fig.update_layout(title_text='Compare last 100 days vs next 30 days',
plot_bgcolor='white', font_size=15, font_color='black', legend_title_text='Open Price')
fig.for_each_trace(lambda t: t.update(name = next(names)))
fig.update_xaxes(showgrid=False)
fig.update_yaxes(showgrid=False)
fig.show()
lstmgrudf=df1.tolist()
lstmgrudf.extend((np.array(lst_output).reshape(-1,1)).tolist())
lstmgrudf=Scaler.inverse_transform(lstmgrudf).reshape(1,-1).tolist()[0]
names = cycle(['Open price'])
fig = px.line(lstmgrudf,labels={'value': 'Stock price','index': 'Timestamp'})
fig.update_layout(title_text='Plotting whole Open stock price with prediction',
plot_bgcolor='white', font_size=15, font_color='black', legend_title_text='Stock')
fig.for_each_trace(lambda t: t.update(name = next(names)))
fig.update_xaxes(showgrid=False)
fig.update_yaxes(showgrid=False)
fig.show()